# Importing necessary libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import pandas_profiling
# Importing the training dataset
# Load the historical NSE-TATAGLOBAL data and keep the 'Open' column as a
# 2-D array of shape (n_samples, 1), ready for the scaler/LSTM pipeline.
dataset_train = pd.read_csv('NSE-TATAGLOBAL.csv')
training_set = dataset_train.iloc[:, [1]].values  # column index 1 is 'Open'
dataset_train.head()
| Date | Open | High | Low | Last | Close | Total Trade Quantity | Turnover (Lacs) | |
|---|---|---|---|---|---|---|---|---|
| 0 | 2018-09-28 | 234.05 | 235.95 | 230.20 | 233.50 | 233.75 | 3069914 | 7162.35 |
| 1 | 2018-09-27 | 234.55 | 236.80 | 231.10 | 233.80 | 233.25 | 5082859 | 11859.95 |
| 2 | 2018-09-26 | 240.00 | 240.00 | 232.50 | 235.00 | 234.25 | 2240909 | 5248.60 |
| 3 | 2018-09-25 | 233.30 | 236.75 | 232.00 | 236.25 | 236.10 | 2349368 | 5503.90 |
| 4 | 2018-09-24 | 233.55 | 239.20 | 230.75 | 234.00 | 233.30 | 3423509 | 7999.55 |
# Summarise the training frame: column dtypes, non-null counts, memory usage.
dataset_train.info()
<class 'pandas.core.frame.DataFrame'> RangeIndex: 2035 entries, 0 to 2034 Data columns (total 8 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 Date 2035 non-null object 1 Open 2035 non-null float64 2 High 2035 non-null float64 3 Low 2035 non-null float64 4 Last 2035 non-null float64 5 Close 2035 non-null float64 6 Total Trade Quantity 2035 non-null int64 7 Turnover (Lacs) 2035 non-null float64 dtypes: float64(6), int64(1), object(1) memory usage: 127.3+ KB
# Generate an exploratory profiling report (distributions, correlations,
# missing values) for the training data.
# NOTE(review): pandas_profiling has been renamed to ydata-profiling upstream;
# confirm the installed version still exposes this module name.
pandas_profiling.ProfileReport(dataset_train)
# Scale the raw 'Open' prices into [0, 1]. The fitted scaler is reused later
# to transform the test-period inputs and to invert the model's predictions.
from sklearn.preprocessing import MinMaxScaler

sc = MinMaxScaler()  # default feature_range is (0, 1)
training_set_scaled = sc.fit_transform(training_set)
training_set_scaled
array([[0.6202352 ],
[0.62226277],
[0.64436334],
...,
[0.16504461],
[0.15896188],
[0.16626115]])
# Build supervised sliding windows: each sample X is the previous 60 scaled
# prices and the target y is the price that immediately follows.
window = 60
X_train = []
y_train = []
# Derive the loop bound from the data instead of hard-coding 2035 (the row
# count of this particular CSV), so the cell keeps working if the file grows.
for i in range(window, len(training_set_scaled)):
    X_train.append(training_set_scaled[i - window:i, 0])
    y_train.append(training_set_scaled[i, 0])
X_train, y_train = np.array(X_train), np.array(y_train)
X_train
array([[0.6202352 , 0.62226277, 0.64436334, ..., 0.79622871, 0.81062449,
0.74371452],
[0.62226277, 0.64436334, 0.61719384, ..., 0.81062449, 0.74371452,
0.77007299],
[0.64436334, 0.61719384, 0.61820762, ..., 0.74371452, 0.77007299,
0.73641525],
...,
[0.19870235, 0.21796431, 0.21553122, ..., 0.14963504, 0.14801298,
0.15815085],
[0.21796431, 0.21553122, 0.20600162, ..., 0.14801298, 0.15815085,
0.16504461],
[0.21553122, 0.20600162, 0.21654501, ..., 0.15815085, 0.16504461,
0.15896188]])
y_train
array([0.77007299, 0.73641525, 0.73763179, ..., 0.16504461, 0.15896188,
0.16626115])
# Append a trailing feature axis so the tensor matches the LSTM's expected
# input shape of (samples, timesteps, features=1).
X_train = np.expand_dims(X_train, axis=-1)
X_train
array([[[0.6202352 ],
[0.62226277],
[0.64436334],
...,
[0.79622871],
[0.81062449],
[0.74371452]],
[[0.62226277],
[0.64436334],
[0.61719384],
...,
[0.81062449],
[0.74371452],
[0.77007299]],
[[0.64436334],
[0.61719384],
[0.61820762],
...,
[0.74371452],
[0.77007299],
[0.73641525]],
...,
[[0.19870235],
[0.21796431],
[0.21553122],
...,
[0.14963504],
[0.14801298],
[0.15815085]],
[[0.21796431],
[0.21553122],
[0.20600162],
...,
[0.14801298],
[0.15815085],
[0.16504461]],
[[0.21553122],
[0.20600162],
[0.21654501],
...,
[0.15815085],
[0.16504461],
[0.15896188]]])
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from keras.layers import Dropout
# Initialising the Recurrent Neural Network (RNN).
# (The section headings below were bare markdown text in the notebook export,
# which is invalid Python when run as a script — converted to comments.)
regressor = Sequential()

# First LSTM layer with dropout regularisation; return_sequences=True passes
# the full hidden-state sequence on to the next stacked LSTM layer.
regressor.add(LSTM(units = 50, return_sequences = True, input_shape = (X_train.shape[1], 1)))
regressor.add(Dropout(0.2))

# Second LSTM layer and dropout
regressor.add(LSTM(units = 50, return_sequences = True))
regressor.add(Dropout(0.2))

# Third LSTM layer and dropout
regressor.add(LSTM(units = 50, return_sequences = True))
regressor.add(Dropout(0.2))

# Fourth (final) LSTM layer returns only the last hidden state
regressor.add(LSTM(units = 50))
regressor.add(Dropout(0.2))

# Output layer: a single unit predicting the next scaled 'Open' price
regressor.add(Dense(units = 1))

# Compiling the RNN
regressor.compile(optimizer = 'adam', loss = 'mean_squared_error')

# Fitting the RNN to Training Data Set
regressor.fit(X_train, y_train, epochs = 100, batch_size = 32)
Epoch 1/100 62/62 [==============================] - 11s 59ms/step - loss: 0.0105 Epoch 2/100 62/62 [==============================] - 4s 63ms/step - loss: 0.0030 Epoch 3/100 62/62 [==============================] - 4s 58ms/step - loss: 0.0033 Epoch 4/100 62/62 [==============================] - 3s 55ms/step - loss: 0.0026 Epoch 5/100 62/62 [==============================] - 3s 50ms/step - loss: 0.0023 Epoch 6/100 62/62 [==============================] - 4s 65ms/step - loss: 0.0021 Epoch 7/100 62/62 [==============================] - 4s 58ms/step - loss: 0.0020 Epoch 8/100 62/62 [==============================] - 4s 58ms/step - loss: 0.0020 Epoch 9/100 62/62 [==============================] - 3s 56ms/step - loss: 0.0018 Epoch 10/100 62/62 [==============================] - 3s 55ms/step - loss: 0.0019 Epoch 11/100 62/62 [==============================] - 4s 64ms/step - loss: 0.0018 Epoch 12/100 62/62 [==============================] - 4s 61ms/step - loss: 0.0016 Epoch 13/100 62/62 [==============================] - 5s 74ms/step - loss: 0.0017 Epoch 14/100 62/62 [==============================] - 4s 63ms/step - loss: 0.0017 Epoch 15/100 62/62 [==============================] - 4s 68ms/step - loss: 0.0017 Epoch 16/100 62/62 [==============================] - 4s 58ms/step - loss: 0.0014 Epoch 17/100 62/62 [==============================] - 4s 57ms/step - loss: 0.0015 Epoch 18/100 62/62 [==============================] - 4s 58ms/step - loss: 0.0015 Epoch 19/100 62/62 [==============================] - 4s 57ms/step - loss: 0.0015 Epoch 20/100 62/62 [==============================] - 4s 57ms/step - loss: 0.0015 Epoch 21/100 62/62 [==============================] - 4s 57ms/step - loss: 0.0012 Epoch 22/100 62/62 [==============================] - 4s 56ms/step - loss: 0.0015 Epoch 23/100 62/62 [==============================] - 4s 58ms/step - loss: 0.0013 Epoch 24/100 62/62 [==============================] - 3s 56ms/step - loss: 0.0012 Epoch 25/100 62/62 
[==============================] - 4s 57ms/step - loss: 0.0014 Epoch 26/100 62/62 [==============================] - 4s 59ms/step - loss: 0.0012 Epoch 27/100 62/62 [==============================] - 4s 57ms/step - loss: 0.0012 Epoch 28/100 62/62 [==============================] - 4s 59ms/step - loss: 0.0012 Epoch 29/100 62/62 [==============================] - 4s 69ms/step - loss: 0.0012 Epoch 30/100 62/62 [==============================] - 4s 64ms/step - loss: 0.0011 Epoch 31/100 62/62 [==============================] - 3s 55ms/step - loss: 0.0012 Epoch 32/100 62/62 [==============================] - 4s 62ms/step - loss: 0.0012 Epoch 33/100 62/62 [==============================] - 4s 57ms/step - loss: 0.0011 Epoch 34/100 62/62 [==============================] - 6s 89ms/step - loss: 0.0010 Epoch 35/100 62/62 [==============================] - 3s 55ms/step - loss: 0.0010 Epoch 36/100 62/62 [==============================] - 4s 70ms/step - loss: 9.9000e-04 Epoch 37/100 62/62 [==============================] - 3s 54ms/step - loss: 0.0011 Epoch 38/100 62/62 [==============================] - 3s 54ms/step - loss: 9.8947e-04 Epoch 39/100 62/62 [==============================] - 3s 56ms/step - loss: 0.0011 Epoch 40/100 62/62 [==============================] - 4s 57ms/step - loss: 9.6000e-04 Epoch 41/100 62/62 [==============================] - 4s 58ms/step - loss: 9.7817e-04 Epoch 42/100 62/62 [==============================] - 4s 60ms/step - loss: 9.6022e-04 Epoch 43/100 62/62 [==============================] - 3s 56ms/step - loss: 8.2205e-04 Epoch 44/100 62/62 [==============================] - 4s 57ms/step - loss: 7.9079e-04 Epoch 45/100 62/62 [==============================] - 3s 56ms/step - loss: 9.2386e-04 Epoch 46/100 62/62 [==============================] - 3s 56ms/step - loss: 9.2446e-04 Epoch 47/100 62/62 [==============================] - 4s 58ms/step - loss: 8.0497e-04 Epoch 48/100 62/62 [==============================] - 5s 79ms/step - loss: 8.4877e-04 Epoch 
49/100 62/62 [==============================] - 5s 80ms/step - loss: 8.3028e-04 Epoch 50/100 62/62 [==============================] - 3s 55ms/step - loss: 8.0082e-04 Epoch 51/100 62/62 [==============================] - 3s 54ms/step - loss: 9.8309e-04 Epoch 52/100 62/62 [==============================] - 3s 54ms/step - loss: 8.7889e-04 Epoch 53/100 62/62 [==============================] - 4s 68ms/step - loss: 8.0044e-04 Epoch 54/100 62/62 [==============================] - 4s 59ms/step - loss: 7.9293e-04 Epoch 55/100 62/62 [==============================] - 4s 59ms/step - loss: 0.0011 Epoch 56/100 62/62 [==============================] - 3s 54ms/step - loss: 9.0618e-04 Epoch 57/100 62/62 [==============================] - 3s 52ms/step - loss: 8.0038e-04 Epoch 58/100 62/62 [==============================] - 4s 59ms/step - loss: 8.0019e-04 Epoch 59/100 62/62 [==============================] - 4s 60ms/step - loss: 7.8635e-04 Epoch 60/100 62/62 [==============================] - 3s 51ms/step - loss: 7.8709e-04 Epoch 61/100 62/62 [==============================] - 3s 51ms/step - loss: 7.2873e-04 Epoch 62/100 62/62 [==============================] - 4s 58ms/step - loss: 7.1458e-04 Epoch 63/100 62/62 [==============================] - 3s 54ms/step - loss: 9.0361e-04 Epoch 64/100 62/62 [==============================] - 4s 58ms/step - loss: 7.8749e-04 Epoch 65/100 62/62 [==============================] - 4s 60ms/step - loss: 6.9319e-04 Epoch 66/100 62/62 [==============================] - 3s 53ms/step - loss: 6.9370e-04 Epoch 67/100 62/62 [==============================] - 4s 60ms/step - loss: 7.0590e-04 Epoch 68/100 62/62 [==============================] - 4s 57ms/step - loss: 7.2448e-04 Epoch 69/100 62/62 [==============================] - 3s 52ms/step - loss: 7.1266e-04 Epoch 70/100 62/62 [==============================] - 4s 59ms/step - loss: 6.4555e-04 Epoch 71/100 62/62 [==============================] - 3s 53ms/step - loss: 6.8646e-04 Epoch 72/100 62/62 
[==============================] - 3s 54ms/step - loss: 7.3351e-04 Epoch 73/100 62/62 [==============================] - 4s 61ms/step - loss: 7.5763e-04 Epoch 74/100 62/62 [==============================] - 3s 53ms/step - loss: 6.9943e-04 Epoch 75/100 62/62 [==============================] - 3s 50ms/step - loss: 6.1755e-04 Epoch 76/100 62/62 [==============================] - 3s 53ms/step - loss: 7.0235e-04 Epoch 77/100 62/62 [==============================] - 3s 53ms/step - loss: 6.5054e-04 Epoch 78/100 62/62 [==============================] - 3s 51ms/step - loss: 7.2258e-04 Epoch 79/100 62/62 [==============================] - 3s 51ms/step - loss: 7.6520e-04 Epoch 80/100 62/62 [==============================] - 3s 53ms/step - loss: 6.7132e-04 Epoch 81/100 62/62 [==============================] - 3s 53ms/step - loss: 6.9923e-04 Epoch 82/100 62/62 [==============================] - 4s 58ms/step - loss: 6.6450e-04 Epoch 83/100 62/62 [==============================] - 4s 72ms/step - loss: 7.3477e-04 Epoch 84/100 62/62 [==============================] - 4s 70ms/step - loss: 7.0619e-04 Epoch 85/100 62/62 [==============================] - 4s 65ms/step - loss: 5.5322e-04 Epoch 86/100 62/62 [==============================] - 4s 66ms/step - loss: 5.8852e-04 Epoch 87/100 62/62 [==============================] - 4s 63ms/step - loss: 6.9325e-04 Epoch 88/100 62/62 [==============================] - 4s 70ms/step - loss: 6.6247e-04 Epoch 89/100 62/62 [==============================] - 5s 76ms/step - loss: 6.1619e-04 Epoch 90/100 62/62 [==============================] - 4s 64ms/step - loss: 6.3828e-04 Epoch 91/100 62/62 [==============================] - 4s 70ms/step - loss: 5.0287e-04 Epoch 92/100 62/62 [==============================] - 4s 63ms/step - loss: 6.3826e-04 Epoch 93/100 62/62 [==============================] - 4s 63ms/step - loss: 6.9886e-04 Epoch 94/100 62/62 [==============================] - 4s 67ms/step - loss: 8.7439e-04 Epoch 95/100 62/62 
[==============================] - 4s 61ms/step - loss: 7.5271e-04 Epoch 96/100 62/62 [==============================] - 4s 64ms/step - loss: 6.6443e-04 Epoch 97/100 62/62 [==============================] - 4s 62ms/step - loss: 5.6374e-04 Epoch 98/100 62/62 [==============================] - 4s 61ms/step - loss: 6.1285e-04 Epoch 99/100 62/62 [==============================] - 4s 62ms/step - loss: 5.7517e-04 Epoch 100/100 62/62 [==============================] - 4s 64ms/step - loss: 6.5031e-04
<keras.callbacks.History at 0x1e92b7d3400>
# Getting the real stock price of TATAGLOBAL in 2017
# Load the held-out 2017 data and pull the actual 'Open' prices, which the
# model's predictions will be compared against.
dataset_test = pd.read_csv('tatatest.csv')
real_stock_price = dataset_test.iloc[:, [1]].values  # column index 1 is 'Open'
dataset_test
| Date | Open | High | Low | Last | Close | Total Trade Quantity | Turnover (Lacs) | |
|---|---|---|---|---|---|---|---|---|
| 0 | 12/29/2017 | 308.05 | 318.00 | 306.35 | 316.05 | 316.40 | 6874520 | 21586.26 |
| 1 | 12/28/2017 | 306.50 | 309.30 | 303.55 | 306.80 | 306.60 | 4620000 | 14164.22 |
| 2 | 12/27/2017 | 305.50 | 308.80 | 302.50 | 305.65 | 305.55 | 3315278 | 10164.51 |
| 3 | 12/26/2017 | 303.70 | 307.70 | 300.55 | 305.50 | 305.95 | 3956481 | 12073.55 |
| 4 | 12/22/2017 | 301.00 | 306.35 | 299.70 | 302.80 | 302.75 | 4703911 | 14271.47 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 243 | 1/6/2017 | 125.40 | 126.95 | 124.05 | 124.50 | 124.65 | 1255021 | 1573.67 |
| 244 | 1/5/2017 | 124.75 | 125.65 | 124.55 | 125.30 | 125.20 | 503565 | 630.23 |
| 245 | 1/4/2017 | 125.00 | 126.05 | 123.80 | 124.10 | 124.45 | 656213 | 818.98 |
| 246 | 1/3/2017 | 122.80 | 125.40 | 122.80 | 124.75 | 124.45 | 580430 | 721.93 |
| 247 | 1/2/2017 | 122.80 | 124.20 | 121.35 | 123.55 | 123.55 | 1102178 | 1354.06 |
248 rows × 8 columns
real_stock_price  # inspect the ground-truth 'Open' prices for the test period
array([[308.05],
[306.5 ],
[305.5 ],
[303.7 ],
[301. ],
[302.25],
[301. ],
[292. ],
[280. ],
[289. ],
[288. ],
[290. ],
[298.6 ],
[290. ],
[287.55],
[276. ],
[278.9 ],
[279.4 ],
[285. ],
[288.45],
[278.9 ],
[287.3 ],
[277. ],
[277.5 ],
[275.05],
[273.15],
[273.5 ],
[275.9 ],
[261.15],
[252.25],
[254. ],
[237.9 ],
[234.7 ],
[236.3 ],
[240.7 ],
[225.8 ],
[226. ],
[235. ],
[224.9 ],
[227. ],
[227.9 ],
[226.95],
[226.95],
[225. ],
[211.3 ],
[204.95],
[211. ],
[204. ],
[205.5 ],
[207.9 ],
[211.45],
[210.4 ],
[210.3 ],
[211. ],
[211. ],
[209.45],
[209.5 ],
[209.65],
[210. ],
[204.95],
[207.3 ],
[207.25],
[204.5 ],
[199.05],
[206.5 ],
[196. ],
[201.45],
[211. ],
[214. ],
[216.4 ],
[213. ],
[212.65],
[211. ],
[208.5 ],
[214.65],
[212. ],
[209.2 ],
[206.1 ],
[207.95],
[195.1 ],
[194.3 ],
[198. ],
[198.25],
[198.5 ],
[193. ],
[192.95],
[195. ],
[197.3 ],
[191.5 ],
[197.15],
[196.65],
[189.8 ],
[194. ],
[182.5 ],
[166. ],
[155.1 ],
[161.85],
[165.5 ],
[171.8 ],
[167. ],
[164.7 ],
[165.95],
[168. ],
[171. ],
[169.5 ],
[167.9 ],
[173.85],
[172. ],
[170.5 ],
[172.2 ],
[171.9 ],
[175.4 ],
[173.8 ],
[176.1 ],
[174.25],
[175.9 ],
[175.35],
[171.05],
[167.05],
[159. ],
[153.15],
[156.5 ],
[151.5 ],
[153.7 ],
[152.5 ],
[148. ],
[149.95],
[147.25],
[151.15],
[154.6 ],
[157. ],
[156.8 ],
[156.75],
[159. ],
[159.95],
[158.45],
[156.95],
[152.45],
[153.5 ],
[154.9 ],
[155.1 ],
[152.65],
[155. ],
[155.05],
[153.1 ],
[148.35],
[148.55],
[145. ],
[147.2 ],
[144. ],
[143.45],
[145.2 ],
[148.05],
[149.8 ],
[149.5 ],
[152. ],
[154. ],
[154.6 ],
[152.65],
[154. ],
[155. ],
[154.9 ],
[153.2 ],
[152.8 ],
[156.5 ],
[156.3 ],
[158.85],
[154.3 ],
[153.4 ],
[155. ],
[152.45],
[152.4 ],
[151. ],
[153.5 ],
[150.25],
[147.9 ],
[149.1 ],
[151. ],
[152. ],
[153.95],
[151.5 ],
[151.5 ],
[152.8 ],
[152.9 ],
[152.8 ],
[150.65],
[151.45],
[151.45],
[151.7 ],
[149.9 ],
[151. ],
[152.9 ],
[143.9 ],
[145.55],
[147. ],
[144.8 ],
[145.7 ],
[141.6 ],
[139.6 ],
[140.2 ],
[140.85],
[140.6 ],
[142.4 ],
[143. ],
[143.95],
[141.1 ],
[145.45],
[138.5 ],
[139.15],
[139. ],
[140.05],
[142.55],
[142.15],
[141. ],
[140.9 ],
[139.2 ],
[140.9 ],
[142.4 ],
[144.9 ],
[143.95],
[146. ],
[141.95],
[142.1 ],
[141.9 ],
[138.2 ],
[132.45],
[129.05],
[130.5 ],
[129.1 ],
[128.35],
[129.25],
[128.8 ],
[127.6 ],
[130.25],
[129.8 ],
[128. ],
[128.6 ],
[128.2 ],
[126.9 ],
[128.3 ],
[125. ],
[125.5 ],
[124.75],
[125.4 ],
[124.75],
[125. ],
[122.8 ],
[122.8 ]])
# Getting the predicted TATAGLOBAL stock price for 2017
# Concatenate the train and test 'Open' series so the first test-day window
# can reach back into the last 60 training days.
dataset_total = pd.concat((dataset_train['Open'], dataset_test['Open']), axis = 0)
inputs = dataset_total[len(dataset_total) - len(dataset_test) - 60:].values
inputs = inputs.reshape(-1, 1)
# Reuse the scaler fitted on the training data (transform only — no refit),
# otherwise test prices would be scaled on a different range.
inputs = sc.transform(inputs)
X_test = []
# One 60-step window per test day. Deriving the bound from len(dataset_test)
# replaces the hard-coded 309, which produced one extra window (249 windows
# for 248 test rows — an off-by-one) and would break for any other test file.
for i in range(60, 60 + len(dataset_test)):
    X_test.append(inputs[i-60:i, 0])
X_test = np.array(X_test)
X_test = np.reshape(X_test, (X_test.shape[0], X_test.shape[1], 1))
predicted_stock_price = regressor.predict(X_test)
# Map the scaled predictions back to rupee prices.
predicted_stock_price = sc.inverse_transform(predicted_stock_price)
8/8 [==============================] - 0s 21ms/step
# Plot actual vs. predicted prices on a shared time axis.
fig, ax = plt.subplots()
ax.plot(real_stock_price, color = 'red', label = 'Real TATAGLOBAL Stock Price')
ax.plot(predicted_stock_price, color = 'blue', label = 'Predicted TATAGLOBAL Stock Price')
ax.set_title('TATA Stock Price Prediction')
ax.set_xlabel('Time')
ax.set_ylabel('TATA Stock Price')
ax.legend()
plt.show()